P1

8.1.)

# 8.1: PCA of the population covariance matrix Sigma
Sigma <- matrix(c(5, 2, 2, 2), nrow = 2, ncol = 2)
Sigma
##      [,1] [,2]
## [1,]    5    2
## [2,]    2    2
# Spectral decomposition: eigenvalues are the variances of the principal
# components; the eigenvector COLUMNS are the coefficient vectors e_i.
eigSigma <- eigen(Sigma)   # `<-` for assignment (was `=`)
eigSigma$values
## [1] 6 1
eigSigma$vectors
##            [,1]       [,2]
## [1,] -0.8944272  0.4472136
## [2,] -0.4472136 -0.8944272

The population principal components are (using the eigenvector columns): \[ Y_1 = -0.8944X_1 - 0.4472X_2\\ Y_2 = 0.4472X_1 - 0.8944X_2 \]

The proportion of total variance explained by \(Y_1\) is:

# Proportion of total variance explained by the first component:
# lambda_1 / (lambda_1 + lambda_2) = 6 / 7
eigSigma$values[1] / sum(eigSigma$values)
## [1] 0.8571429

8.2.) a.)

# 8.2 a): repeat the PCA on the correlation matrix.
# corr = D^{-1} Sigma D^{-1}, where D carries the standard deviations
# on its diagonal. (cov2cor(Sigma) is the one-line equivalent.)
D <- diag(sqrt(diag(Sigma)))
D
##          [,1]     [,2]
## [1,] 2.236068 0.000000
## [2,] 0.000000 1.414214
Dinv <- solve(D)                   # invert once instead of twice
corr <- Dinv %*% Sigma %*% Dinv
corr
##           [,1]      [,2]
## [1,] 1.0000000 0.6324555
## [2,] 0.6324555 1.0000000
eigCorr <- eigen(corr)
eigCorr$values
## [1] 1.6324555 0.3675445
eigCorr$vectors
##            [,1]       [,2]
## [1,] -0.7071068  0.7071068
## [2,] -0.7071068 -0.7071068

The population principal components are (using the eigenvector columns): \[ Y_1 = -0.7071Z_1 - 0.7071Z_2\\ Y_2 = 0.7071Z_1 - 0.7071Z_2 \] The proportion of total variance explained by \(Y_1\) is:

# Proportion of total variance explained by the first component of the
# correlation matrix: lambda_1 / p = 1.6324555 / 2
eigCorr$values[1] / sum(eigCorr$values)
## [1] 0.8162278

b.) As we can see above, the estimates are not the same. When we standardize our variables, the loadings become equal in magnitude. Since each variable has now been scaled to have the same mean and variance, the variables contribute equally, unlike before. c.)

#PDF Pg 460
# Correlations between components and standardized variables (J&W eq. 8-8):
# rho(Y_i, Z_k) = e_{ki} * sqrt(lambda_i)  (sigma_kk = 1 for standardized Z)
rho1 <- eigCorr$vectors[1,1] * sqrt(eigCorr$values[1])   # rho(Y1, Z1)
rho1
## [1] -0.9034532
# rho(Y1, Z2) pairs the SECOND entry of eigenvector 1 with lambda_1.
# (The original used vectors[1,2]*sqrt(values[1]), mixing e_2 with lambda_1.)
rho12 <- eigCorr$vectors[2,1] * sqrt(eigCorr$values[1])  # rho(Y1, Z2)
rho12
## [1] -0.9034532
rho2 <- eigCorr$vectors[2,2] * sqrt(eigCorr$values[2])   # rho(Y2, Z2)
rho2
## [1] -0.4286866

P2

8.10 a.) and b.)

# 8.10: weekly rates of return for five stocks (Table 8.4 of the text)
stock <- read.table("~/GitHub/STA135/Homework/HW5/T8-4.dat")
names(stock) <- c("JP Morgan", "Citibank", "Wells Fargo", "Royal Dutch Shell", "Exxon Mobil")
stock
##      JP Morgan   Citibank Wells Fargo Royal Dutch Shell Exxon Mobil
## 1    0.0130338 -0.0078431  -0.0031889        -0.0447693   0.0052151
## 2    0.0084862  0.0166886  -0.0062100         0.0119560   0.0134890
## 3   -0.0179153 -0.0086393   0.0100360         0.0000000  -0.0061428
## 4    0.0215589 -0.0034858   0.0174353        -0.0285917  -0.0069534
## 5    0.0108225  0.0037167  -0.0101345         0.0291900   0.0409751
## 6    0.0101713 -0.0121978  -0.0083768         0.0137083   0.0029895
## 7    0.0111288  0.0280044   0.0080721         0.0305433   0.0032290
## 8    0.0484801 -0.0051480   0.0182495         0.0063348   0.0076752
## 9   -0.0344914 -0.0137991  -0.0080468        -0.0299011  -0.0108108
## 10  -0.0046596  0.0209882  -0.0060841        -0.0203940  -0.0126677
## 11  -0.0018205 -0.0055675  -0.0107587        -0.0089898  -0.0183648
## 12   0.0148515  0.0346684  -0.0060004         0.0362855   0.0287032
## 13  -0.0092426 -0.0052029   0.0047161         0.0264916   0.0129547
## 14  -0.0458668 -0.0278243  -0.0142696         0.0374776   0.0332022
## 15  -0.0244432 -0.0182914   0.0059048        -0.0259572  -0.0202333
## 16  -0.0183742 -0.0140289   0.0011361         0.0073284  -0.0097182
## 17  -0.0297788 -0.0284571  -0.0164555         0.0310847   0.0164377
## 18  -0.0225080 -0.0228833   0.0344231        -0.0062006   0.0067584
## 19   0.0119617 -0.0067916   0.0185908        -0.0193632  -0.0153440
## 20   0.0209811  0.0240509   0.0129586         0.0355419   0.0150962
## 21   0.0118669  0.0025328  -0.0036036         0.0021186   0.0028784
## 22   0.0140160  0.0172255   0.0003617         0.0150106   0.0141115
## 23  -0.0149506  0.0031610  -0.0001808         0.0310352   0.0226415
## 24   0.0203322 -0.0148548  -0.0182607        -0.0028283  -0.0161439
## 25   0.0112265 -0.0221613  -0.0051565        -0.0247164   0.0105485
## 26  -0.0327505 -0.0158879  -0.0037023         0.0143332   0.0164695
## 27  -0.0261119 -0.0313390   0.0156076         0.0024575   0.0082154
## 28   0.0182675  0.0156863  -0.0219539        -0.0498468  -0.0110910
## 29   0.0219907  0.0043436   0.0136551         0.0152655   0.0217441
## 30  -0.0331257 -0.0204229  -0.0101495        -0.0186362  -0.0255376
## 31   0.0213763  0.0188864   0.0210664         0.0228744   0.0013793
## 32   0.0484518  0.0440539   0.0087639         0.0160338   0.0073462
## 33   0.0276183  0.0168319   0.0104977         0.0004153   0.0043300
## 34   0.0031932  0.0024943   0.0103887         0.0228311   0.0356251
## 35  -0.0010610  0.0085953  -0.0023046        -0.0040584   0.0065732
## 36  -0.0037175 -0.0060552   0.0035537         0.0114099   0.0211145
## 37   0.0023987 -0.0597924  -0.0118626        -0.0251813  -0.0110851
## 38   0.0148897  0.0163187   0.0265185         0.0200455   0.0219875
## 39  -0.0089075 -0.0068477   0.0047129         0.0129660   0.0196161
## 40  -0.0227333 -0.0140276  -0.0069493         0.0024000  -0.0165494
## 41  -0.0329997 -0.0313480  -0.0362141         0.0055866  -0.0065208
## 42   0.0302098  0.0522778   0.0317662         0.0267857   0.0105865
## 43   0.0195493  0.0395079   0.0381773         0.0216425   0.0238843
## 44  -0.0045273  0.0204825   0.0174547         0.0253452   0.0059341
## 45  -0.0446763 -0.0408118  -0.0163225        -0.0035049  -0.0008137
## 46   0.0070008  0.0060451   0.0154081         0.0320252   0.0252443
## 47   0.0100111  0.0048532  -0.0016675        -0.0050224  -0.0266084
## 48  -0.0112885  0.0057498   0.0100217        -0.0173067  -0.0024480
## 49   0.0236703  0.0155500  -0.0162064         0.0001835  -0.0069530
## 50   0.0165941  0.0457104   0.0065557         0.0284299   0.0434514
## 51  -0.0040139 -0.0118432  -0.0041750         0.0039237  -0.0136175
## 52  -0.0069855  0.0098061   0.0003354        -0.0261148  -0.0286114
## 53  -0.0154221 -0.0233060  -0.0238055         0.0113097   0.0257467
## 54  -0.0252817  0.0088378  -0.0094453         0.0075758  -0.0124498
## 55   0.0039470  0.0094174   0.0067614         0.0241676   0.0164701
## 56   0.0188149  0.0379692   0.0154985         0.0510400   0.0784157
## 57  -0.0055127 -0.0075251  -0.0111921        -0.0044903   0.0198479
## 58  -0.0260532 -0.0168492  -0.0080604         0.0432676   0.0587486
## 59   0.0128059 -0.0059983   0.0013831         0.0148919   0.0649373
## 60   0.0146108  0.0025862   0.0100138         0.0362891   0.0048395
## 61  -0.0373858 -0.0126827  -0.0114530        -0.0272533  -0.0396532
## 62  -0.0028769 -0.0195950  -0.0070897        -0.0100172   0.0262454
## 63  -0.0300058 -0.0497446  -0.0167189        -0.0507510  -0.0583157
## 64  -0.0193337  0.0021033   0.0178888         0.0154897   0.0262930
## 65   0.0172884  0.0174907   0.0022620         0.0195178  -0.0089331
## 66  -0.0163983  0.0077928  -0.0072917        -0.0358752  -0.0636054
## 67   0.0275841  0.0125085  -0.0078699         0.0196896   0.0573919
## 68   0.0176991  0.0233603   0.0216816        -0.0127639  -0.0401924
## 69   0.0034783 -0.0079017   0.0050035         0.0071275   0.0100215
## 70  -0.0323512 -0.0146018  -0.0084120        -0.0482225  -0.0628987
## 71   0.0465672  0.0410867   0.0349723         0.0152170   0.0056721
## 72  -0.0071306 -0.0107828  -0.0086986         0.0303185   0.0517014
## 73  -0.0071818  0.0058862   0.0091124        -0.0064473   0.0060779
## 74  -0.0031829  0.0017339  -0.0006689         0.0064892   0.0214996
## 75   0.0182874 -0.0038944   0.0046854         0.0619937   0.0431379
## 76  -0.0142531 -0.0106429  -0.0141572         0.0001557  -0.0450225
## 77  -0.0046270 -0.0169045   0.0089542         0.0390661   0.0027938
## 78  -0.0072632  0.0075927   0.0000000        -0.0049431   0.0186314
## 79   0.0301434 -0.0019947   0.0261219        -0.0307090  -0.0208547
## 80  -0.0071023 -0.0430824  -0.0177872        -0.0518714   0.0230447
## 81  -0.0128755 -0.0109074  -0.0066456         0.0167076  -0.0126280
## 82   0.0028986  0.0030502  -0.0073591         0.0443048  -0.0112340
## 83  -0.0265896 -0.0002339  -0.0033698         0.0615551   0.0561091
## 84   0.0068290  0.0124006   0.0076078        -0.0419997  -0.0365773
## 85  -0.0259510 -0.0240351  -0.0303691        -0.0209345  -0.0068717
## 86   0.0136240  0.0182335   0.0086520         0.0568640   0.0387476
## 87   0.0209080  0.0165116   0.0089209        -0.0230172   0.0416320
## 88   0.0049737  0.0187600   0.0023805         0.0123049   0.0078337
## 89  -0.0262009 -0.0044914  -0.0166243        -0.0096353   0.0020622
## 90  -0.0041854  0.0060907  -0.0067276         0.0134710  -0.0045908
## 91   0.0090063 -0.0022422   0.0000000        -0.0429774  -0.0620229
## 92   0.0053555 -0.0083146   0.0069469        -0.0188272  -0.0161072
## 93   0.0307783 -0.0160888   0.0031045        -0.0539478  -0.0556609
## 94   0.0373241  0.0359281   0.0252751         0.0581879   0.0169708
## 95   0.0238029  0.0031125  -0.0068757         0.0122545   0.0281715
## 96   0.0256826  0.0525266   0.0406957        -0.0316623  -0.0188482
## 97  -0.0060622  0.0086334   0.0058413         0.0445584   0.0305941
## 98   0.0217449  0.0229645   0.0291983         0.0084395   0.0319296
## 99   0.0033740 -0.0153061  -0.0238245        -0.0016738  -0.0172270
## 100  0.0033626  0.0029016  -0.0030507        -0.0012193  -0.0097005
## 101  0.0170147  0.0095061   0.0181994        -0.0161758  -0.0075614
## 102  0.0103929 -0.0026612   0.0044290        -0.0024818  -0.0164502
## 103 -0.0127948 -0.0143678  -0.0187402        -0.0049759  -0.0163732
cov(stock)
##                      JP Morgan     Citibank  Wells Fargo Royal Dutch Shell
## JP Morgan         4.332695e-04 0.0002756679 1.590265e-04      6.411929e-05
## Citibank          2.756679e-04 0.0004387172 1.799737e-04      1.814512e-04
## Wells Fargo       1.590265e-04 0.0001799737 2.239722e-04      7.341348e-05
## Royal Dutch Shell 6.411929e-05 0.0001814512 7.341348e-05      7.224964e-04
## Exxon Mobil       8.896616e-05 0.0001232623 6.054612e-05      5.082772e-04
##                    Exxon Mobil
## JP Morgan         8.896616e-05
## Citibank          1.232623e-04
## Wells Fargo       6.054612e-05
## Royal Dutch Shell 5.082772e-04
## Exxon Mobil       7.656742e-04
#Using spectral decomposition instead of SVD
# Fit the PCA once and reuse the object (princomp eigendecomposes S;
# the original refit princomp(stock) for every quantity below).
pca.stock <- princomp(stock)
# Eigenvalues = variances of the principal components
pca.stock$sdev ^ 2
##       Comp.1       Comp.2       Comp.3       Comp.4       Comp.5 
## 0.0013543996 0.0006943522 0.0002513383 0.0001412181 0.0001177325
# Total variance = sum of the eigenvalues = trace of S
sum(pca.stock$sdev ^ 2)
## [1] 0.002559041
summary(pca.stock)
## Importance of components:
##                            Comp.1     Comp.2     Comp.3     Comp.4     Comp.5
## Standard deviation     0.03680217 0.02635056 0.01585365 0.01188352 0.01085046
## Proportion of Variance 0.52926066 0.27133298 0.09821584 0.05518400 0.04600652
## Cumulative Proportion  0.52926066 0.80059364 0.89880948 0.95399348 1.00000000
#Interpret
pca.stock$loadings
## 
## Loadings:
##                   Comp.1 Comp.2 Comp.3 Comp.4 Comp.5
## JP Morgan          0.223  0.625  0.326  0.663  0.118
## Citibank           0.307  0.570 -0.250 -0.414 -0.589
## Wells Fargo        0.155  0.345        -0.497  0.780
## Royal Dutch Shell  0.639 -0.248 -0.642  0.309  0.148
## Exxon Mobil        0.651 -0.322  0.646 -0.216       
## 
##                Comp.1 Comp.2 Comp.3 Comp.4 Comp.5
## SS loadings       1.0    1.0    1.0    1.0    1.0
## Proportion Var    0.2    0.2    0.2    0.2    0.2
## Cumulative Var    0.2    0.4    0.6    0.8    1.0

As we can see from above, the cumulative proportion of variance explained by the first 3 components is 0.899 (89.9 percent). We can see from the loadings that the first component increases with the stock prices of the oil companies, while the second component captures the movement of the banks. c.)

#pdf pg 456-457
# Bonferroni simultaneous CIs for the first three eigenvalues at family
# level 90% (m = 3 intervals): lambda_hat / (1 +/- z_{alpha/(2m)} sqrt(2/n)).
# Compute the shared pieces once instead of refitting princomp six times.
ev <- princomp(stock)$sdev ^ 2                  # sample eigenvalues
half <- qnorm(0.1 / (2 * 3), lower.tail = FALSE) * sqrt(2 / nrow(stock))
lambda1_lower <- ev[1] / (1 + half)
lambda1_upper <- ev[1] / (1 - half)
c(lambda1_lower, lambda1_upper)
##      Comp.1      Comp.1 
## 0.001044629 0.001925329
lambda2_lower <- ev[2] / (1 + half)
lambda2_upper <- ev[2] / (1 - half)
c(lambda2_lower, lambda2_upper)
##       Comp.2       Comp.2 
## 0.0005355441 0.0009870470
lambda3_lower <- ev[3] / (1 + half)
lambda3_upper <- ev[3] / (1 - half)
c(lambda3_lower, lambda3_upper)
##       Comp.3       Comp.3 
## 0.0001938538 0.0003572867

d.) Given the high cumulative proportion of variance explained by the first two components, it would be logical to conclude that we could summarize stock prices in two dimensions.

P3

8.22.) a.)

# 8.22: bull sale data (Table 1.10 of the text)
bulls <- read.table("~/GitHub/STA135/Homework/HW5/T1-10.dat")
names(bulls) <- c("Breed", "SalePr", "YrHgt", "FtFrBody", "PrctFFB", "Frame", "BkFat", "SaleHt", "SaleWt")
# Keep the breed labels for colouring the plots later, then drop Breed and
# SalePr from the variables entering the PCA.
breed <- bulls[,1]
bulls <- bulls[,-c(1, 2)]
bulls
##    YrHgt FtFrBody PrctFFB Frame BkFat SaleHt SaleWt
## 1   51.0     1128    70.9     7  0.25   54.8   1720
## 2   51.9     1108    72.1     7  0.25   55.3   1575
## 3   49.9     1011    71.6     6  0.15   53.1   1410
## 4   53.1      993    68.9     8  0.35   56.4   1595
## 5   51.2      996    68.6     7  0.25   55.0   1488
## 6   49.2      985    71.4     6  0.15   51.4   1500
## 7   51.0      959    72.1     7  0.20   54.0   1522
## 8   51.5     1060    69.3     7  0.30   55.6   1765
## 9   50.1      979    71.2     6  0.25   51.5   1365
## 10  49.6     1083    75.8     6  0.30   54.6   1640
## 11  50.6     1036    69.2     6  0.15   54.8   1570
## 12  51.1      870    70.9     7  0.15   52.9   1450
## 13  51.1      998    65.5     7  0.40   54.6   1505
## 14  50.2      973    69.5     6  0.40   53.0   1530
## 15  49.0      893    73.9     6  0.20   51.9   1470
## 16  49.6      975    68.2     6  0.50   52.9   1842
## 17  49.1      997    67.9     6  0.30   54.0   1500
## 18  48.4      946    68.6     5  0.20   51.2   1480
## 19  50.9      928    67.2     6  0.25   54.1   1480
## 20  49.5      963    69.4     6  0.35   53.1   1670
## 21  49.2      911    67.4     6  0.20   53.4   1490
## 22  48.1     1003    70.5     5  0.25   54.7   1748
## 23  51.1      915    64.9     7  0.25   54.6   1725
## 24  48.9      924    72.7     5  0.15   52.1   1374
## 25  49.4      959    68.4     6  0.15   52.6   1565
## 26  47.7      944    66.5     5  0.40   53.3   1556
## 27  50.6      897    67.2     6  0.30   54.9   1688
## 28  48.9      974    71.0     5  0.30   54.2   1722
## 29  49.9      872    70.7     6  0.20   53.3   1325
## 30  48.4      841    71.3     5  0.15   51.5   1365
## 31  48.6      920    71.4     5  0.15   52.9   1450
## 32  47.6      974    69.7     5  0.15   51.9   1570
## 33  50.5     1002    68.8     6  0.20   54.4   1735
## 34  50.2      998    68.7     6  0.15   52.9   1540
## 35  49.0     1015    69.8     6  0.30   51.9   1550
## 36  48.7     1056    72.9     5  0.15   52.6   1525
## 37  49.6      984    71.4     6  0.15   53.4   1650
## 38  48.9      934    66.0     5  0.20   52.1   1430
## 39  49.7      929    66.9     6  0.25   53.3   1688
## 40  49.9      919    67.1     6  0.20   54.3   1425
## 41  47.8      931    67.1     5  0.25   51.5   1520
## 42  49.6      952    69.4     6  0.25   52.3   1512
## 43  51.0     1002    72.1     7  0.25   51.9   1410
## 44  48.6      936    65.3     5  0.35   51.4   1550
## 45  48.3      870    65.6     5  0.30   52.5   1588
## 46  50.1      853    67.9     6  0.15   52.9   1390
## 47  48.8      843    67.3     5  0.20   50.4   1390
## 48  47.7      913    68.2     5  0.15   49.4   1345
## 49  47.2      844    70.6     5  0.15   50.1   1285
## 50  54.0     1252    76.5     8  0.15   56.9   1648
## 51  53.3     1383    81.4     8  0.20   59.6   1904
## 52  52.8     1076    74.0     7  0.15   55.5   1615
## 53  53.5     1175    74.5     8  0.10   57.4   1686
## 54  53.2     1027    71.2     8  0.10   56.9   1696
## 55  52.3     1116    71.1     7  0.10   57.5   1620
## 56  51.8     1095    71.1     7  0.15   54.6   1712
## 57  52.7     1141    78.5     7  0.15   55.6   1572
## 58  54.8     1039    70.6     8  0.10   58.7   1600
## 59  52.8      981    74.1     7  0.10   56.9   1750
## 60  52.4      933    71.5     7  0.10   56.2   1640
## 61  51.2     1083    74.5     7  0.20   55.9   1752
## 62  52.3     1143    77.7     7  0.10   56.1   1785
## 63  53.0     1055    76.8     8  0.10   56.7   1526
## 64  52.9     1037    75.0     7  0.10   55.5   1406
## 65  51.8     1076    74.5     7  0.15   55.8   1475
## 66  53.1      964    70.8     8  0.10   55.5   1535
## 67  51.2     1057    74.8     7  0.10   55.5   1520
## 68  50.8     1040    74.5     6  0.10   55.8   1516
## 69  52.7     1079    75.5     7  0.15   56.1   1595
## 70  51.4     1034    71.2     7  0.10   56.0   1655
## 71  50.7     1012    71.6     6  0.10   54.3   1480
## 72  51.4      997    73.4     7  0.10   55.2   1454
## 73  49.8      991    70.8     6  0.15   54.6   1475
## 74  50.0      928    70.8     6  0.10   53.9   1375
## 75  50.1      990    71.0     6  0.10   54.9   1564
## 76  51.7      992    70.6     7  0.15   55.1   1458
pca.bulls <- princomp(bulls)

# Reuse the fitted object rather than refitting with a second princomp() call
summary(pca.bulls)
## Importance of components:
##                             Comp.1     Comp.2       Comp.3       Comp.4
## Standard deviation     142.5090460 69.3580156 2.3146777818 1.8090529826
## Proportion of Variance   0.8081979  0.1914371 0.0002132131 0.0001302373
## Cumulative Proportion    0.8081979  0.9996351 0.9998482652 0.9999785025
##                              Comp.5       Comp.6       Comp.7
## Standard deviation     6.801921e-01 2.703318e-01 6.678305e-02
## Proportion of Variance 1.841179e-05 2.908220e-06 1.774865e-07
## Cumulative Proportion  9.999969e-01 9.999998e-01 1.000000e+00
# Scree plot; spell out $sdev ($sd only worked via partial matching)
plot(pca.bulls$sdev^2, type = 'o', xlab = "Principal Component", ylab = "Eigenvalue")

By looking at the cumulative proportion of our components, we can logically conclude that we only need at most two components, which explain over 99% of the variability in the data.

b.)

pca.bulls$loadings
## 
## Loadings:
##          Comp.1 Comp.2 Comp.3 Comp.4 Comp.5 Comp.6 Comp.7
## YrHgt                   0.286  0.609  0.536  0.510       
## FtFrBody  0.487  0.873                                   
## PrctFFB                 0.904 -0.425                     
## Frame                   0.133  0.311  0.391 -0.855       
## BkFat                                               0.999
## SaleHt                  0.284  0.593 -0.749              
## SaleWt    0.873 -0.487                                   
## 
##                Comp.1 Comp.2 Comp.3 Comp.4 Comp.5 Comp.6 Comp.7
## SS loadings     1.000  1.000  1.000  1.000  1.000  1.000  1.000
## Proportion Var  0.143  0.143  0.143  0.143  0.143  0.143  0.143
## Cumulative Var  0.143  0.286  0.429  0.571  0.714  0.857  1.000

We can think of the first component as one regarding the size of a bull, where we have information about the bull at the time of sale, in this case the weight. The second component can be thought of as summarizing the information regarding the fat free body in pounds.

c.) Yes, we can; however, we only need the variables FtFrBody and SaleWt to tell us meaningful information for a body configuration index. More variables than this would be redundant.

d.)

# http://www.sthda.com/english/wiki/ggfortify-extension-to-ggplot2-to-handle-some-popular-packages-r-software-and-data-visualization
library(ggfortify)
## Loading required package: ggplot2
# Re-attach the breed labels so points can be coloured by breed
bulls$Breed <- breed
# labs(colour = ...) expects the legend title as a string, not the data
# vector itself (the original passed the `breed` vector)
plot <- autoplot(pca.bulls, loadings = TRUE, loadings.label = TRUE, data = bulls, colour = "Breed") + labs(colour = "Breed")
#plot
# Interactive version of the PCA biplot via plotly
library(plotly)
## 
## Attaching package: 'plotly'
## The following object is masked from 'package:ggplot2':
## 
##     last_plot
## The following object is masked from 'package:stats':
## 
##     filter
## The following object is masked from 'package:graphics':
## 
##     layout
ggplotly(plot)

With the plot we can see that there is no clear clustering forming for breeds, most overlap. One notable outlier is in the top right of our scatter plot, with high values in both components, coming from Breed 8.

e.)

library(car)
## Loading required package: carData
# Q-Q plot of the scores on the first principal component.
# Note: `bulls` now carries the Breed column (appended above for plotting),
# so a plain prcomp(bulls) would silently include the breed code as a PCA
# variable; drop it before refitting.
qqPlot(prcomp(bulls[, names(bulls) != "Breed"])$x[,1])

## [1] 51 49

We can see here that the scores on the first principal component are approximately normal. We do have two outliers, at indices 49 and 51.

P4

9.1.)

# 9.1: verify the one-factor decomposition rho = L L' + Psi
rho <- matrix(c(1, .63, .45, .63, 1, .35, .45, .35, 1), nrow = 3, ncol = 3)
psi <- matrix(c(.19, 0, 0, 0, .51, 0, 0, 0, .75), nrow = 3, ncol = 3)
L <- c(.9, .7, .5)   # loadings on the single factor
L %*% t(L) + psi
##      [,1] [,2] [,3]
## [1,] 1.00 0.63 0.45
## [2,] 0.63 1.00 0.35
## [3,] 0.45 0.35 1.00
#Confirm that they are equal
# all.equal compares the matrices elementwise; setequal (used originally)
# treats them as unordered sets of values and would say TRUE even for
# rearranged entries.
isTRUE(all.equal(rho, L %*% t(L) + psi))
## [1] TRUE

9.2.) a.)

# 9.2 a): communalities h_i^2 = sum of squared loadings; with a single
# factor this is just L^2 elementwise
h2 <- L^2
h2
## [1] 0.81 0.49 0.25

b.) By equation 9.5, we know that \(Cov(X_i, F_j) = l_{ij}\), in this case \([.9, .7, .5]\). \(Z_1\) carries the greatest weight on \(F_1\), since it has the largest loading.

9.10.)

#a.)
Lz <- matrix(c(.602 ,.200, .467 ,.154, .926, .143,1.000 ,.000, .874, .476, .894, .327 ), nrow = 6, ncol = 2, byrow = TRUE)

hi <- Lz[,1]^2 + Lz[,2]^2
#Specific Variances 
spvar <- 1 - hi
cat("Specific Variances \n", spvar)
## Specific Variances 
##  0.597596 0.758195 0.122075 0 0.009548 0.093835
Psi <- diag(spvar)
#Psi


#b.)
#Communalities is hi2
cat("\n Communalities \n", hi)
## 
##  Communalities 
##  0.402404 0.241805 0.877925 1 0.990452 0.906165
#c.)
#Proportion of variances
cat("\nProportion of Variances \n")
## 
## Proportion of Variances
hi/ sum(hi)
## [1] 0.09106736 0.05472248 0.19868171 0.22630829 0.22414750 0.20507266
#d.)

R <- matrix(c(1, .505, .569, .602, .621, .603, .505, 1, .422, .467, .482, .450, .569, .422, 1, .926, .877, .878, .602, .467, .926, 1, .874, .894, .621, .482, .877, .874, 1, .937, .603, .450, .878, .894, .937, 1.000 ), nrow = 6, ncol = 6)

cat("Residual Matrix is \n")
## Residual Matrix is
residualMat <- R - (Lz  %*% t(Lz)) - Psi
residualMat
##           [,1]      [,2]      [,3] [,4]      [,5]      [,6]
## [1,]  0.000000  0.193066 -0.017052    0 -0.000348 -0.000588
## [2,]  0.193066  0.000000 -0.032464    0  0.000538 -0.017856
## [3,] -0.017052 -0.032464  0.000000    0 -0.000392  0.003395
## [4,]  0.000000  0.000000  0.000000    0  0.000000  0.000000
## [5,] -0.000348  0.000538 -0.000392    0  0.000000 -0.000008
## [6,] -0.000588 -0.017856  0.003395    0 -0.000008  0.000000